Front page example (XGBoost)

This notebook walks through the code from the front page example, using an XGBoost model.


In [1]:
import xgboost
import shap

# train an XGBoost model on the Boston housing data
# (recent shap releases have removed this dataset; shap.datasets.california()
# serves the same role there)
X, y = shap.datasets.boston()
model = xgboost.XGBRegressor().fit(X, y)

# explain the model's predictions using SHAP values
# (same syntax works for LightGBM, CatBoost, and scikit-learn models)
background = shap.maskers.Independent(X, max_samples=100)
explainer = shap.Explainer(model, background)
shap_values = explainer(X)

# visualize the first prediction's explanation
shap.plots.waterfall(shap_values[0])
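
explainer(X) returns a shap.Explanation object that bundles the SHAP values with the base values and the original feature data. A quick sketch of inspecting it (these attribute names are part of shap's public Explanation API):

print(shap_values.shape)           # (n_samples, n_features)
print(shap_values.values[0])       # SHAP values for the first prediction
print(shap_values.base_values[0])  # expected model output over the background
print(shap_values.data[0])         # feature values for the first sample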



In [2]:
# visualize the first prediction's explanation as a bar plot
# (show=False leaves the figure open for further customization)
shap.plots.bar(shap_values[0], show=False)
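
Passing show=False makes the plotting call leave the active matplotlib figure open instead of showing it immediately, so the figure can be tweaked or saved first. A rough sketch of that pattern (the title and filename below are illustrative, not from the original example):

import matplotlib.pyplot as plt

shap.plots.bar(shap_values[0], show=False)
plt.title("Feature contributions for the first prediction")  # illustrative title
plt.savefig("bar_sample0.png", bbox_inches="tight")          # illustrative path
plt.show()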



In [3]:
# load the JS visualization code into the notebook
shap.initjs()

# visualize the first prediction's explanation
shap.plots.force(shap_values[0])


Out[3]:
[interactive force plot for the first prediction: arrows show each feature (e.g. RM = 6.575, LSTAT = 4.98, PTRATIO = 15.3) pushing the model output of 24.02 higher or lower relative to the base value]
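
Force plots are rendered with JavaScript, so they only appear in environments that can run it. To keep one outside the notebook, shap.save_html can write the plot to a standalone HTML file; a minimal sketch (the filename is illustrative):

plot = shap.plots.force(shap_values[0])
shap.save_html("force_plot.html", plot)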

In [4]:
# visualize all the predictions at once
shap.plots.force(shap_values)


Out[4]:
[interactive stacked force plot of the explanations for all samples (x-axis: sample index 0 to 500; y-axis: model output, roughly -7.7 to 52.3)]

In [5]:
# plot the effect of a single feature across all samples
shap.plots.scatter(shap_values[:, "RM"], color=shap_values)
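
When color is given the full Explanation, shap automatically picks the feature that appears to interact most strongly with RM to color the points. The coloring feature can also be set explicitly, for example:

shap.plots.scatter(shap_values[:, "RM"], color=shap_values[:, "LSTAT"])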



In [6]:
# plot the global importance of each feature
shap.plots.bar(shap_values)
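
By default the bar plot shows the ten most important features and collapses the rest into a single "sum of remaining features" bar; the cutoff can be adjusted with max_display, for example:

shap.plots.bar(shap_values, max_display=5)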



In [7]:
# plot the distribution of importances for each feature over all samples
shap.plots.beeswarm(shap_values)
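
The beeswarm orders features by mean absolute SHAP value. Per the shap documentation, a different ordering can be supplied through the order argument, for example sorting by maximum absolute value to surface features with rare but large effects:

shap.plots.beeswarm(shap_values, order=shap_values.abs.max(0))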